# Load libraries ----
# Grid-building approach adapted from:
# https://github.com/pbs-assess/gfvelocities/blob/main/R/make_prediction_grid.R
library(tidyverse) # readr/dplyr/tidyr for data wrangling, ggplot2 for plots
#> ── Attaching packages ─────────────────────────────────────── tidyverse 1.3.2 ──
#> ✔ ggplot2 3.3.6 ✔ purrr 0.3.4
#> ✔ tibble 3.1.8 ✔ dplyr 1.0.10
#> ✔ tidyr 1.2.0 ✔ stringr 1.5.0
#> ✔ readr 2.1.1 ✔ forcats 0.5.1
#> Warning: package 'tidyr' was built under R version 4.0.5
#> ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
#> ✖ dplyr::filter() masks stats::filter()
#> ✖ dplyr::lag() masks stats::lag()
library(tidylog) # prints row/column counts for each dplyr/tidyr verb (the #> messages below)
#>
#> Attaching package: 'tidylog'
#>
#> The following objects are masked from 'package:dplyr':
#>
#> add_count, add_tally, anti_join, count, distinct, distinct_all,
#> distinct_at, distinct_if, filter, filter_all, filter_at, filter_if,
#> full_join, group_by, group_by_all, group_by_at, group_by_if,
#> inner_join, left_join, mutate, mutate_all, mutate_at, mutate_if,
#> relocate, rename, rename_all, rename_at, rename_if, rename_with,
#> right_join, sample_frac, sample_n, select, select_all, select_at,
#> select_if, semi_join, slice, slice_head, slice_max, slice_min,
#> slice_sample, slice_tail, summarise, summarise_all, summarise_at,
#> summarise_if, summarize, summarize_all, summarize_at, summarize_if,
#> tally, top_frac, top_n, transmute, transmute_all, transmute_at,
#> transmute_if, ungroup
#>
#> The following objects are masked from 'package:tidyr':
#>
#> drop_na, fill, gather, pivot_longer, pivot_wider, replace_na,
#> spread, uncount
#>
#> The following object is masked from 'package:stats':
#>
#> filter
library(sp) # legacy spatial classes: SpatialPolygons, SpatialPoints, over()
library(raster) # shapefile(), crs(), spTransform() used below
#>
#> Attaching package: 'raster'
#>
#> The following object is masked from 'package:tidylog':
#>
#> select
#>
#> The following object is masked from 'package:dplyr':
#>
#> select
#>
#> The following object is masked from 'package:tidyr':
#>
#> extract
library(terra) # rast(), vect(), project(), geom() for reprojection and depth extraction
#> terra version 1.3.22
#>
#> Attaching package: 'terra'
#>
#> The following object is masked from 'package:dplyr':
#>
#> src
library(devtools) # provides source_url() below
#> Loading required package: usethis
# Source code for map plots (defines plot_map_west / plot_map_east / theme_plot
# and loads sf). NOTE(review): sourcing from `main` is not reproducible —
# consider pinning to a commit SHA.
source_url("https://raw.githubusercontent.com/maxlindmark/marine-litter/main/R/functions/map-plot.R")
#> SHA-1 hash of file is 7dd050eb6d584c4f200cdf9c1d93ae83b8ecdecb
#> Warning: package 'sf' was built under R version 4.0.5
#> Linking to GEOS 3.9.1, GDAL 3.4.0, PROJ 8.1.1; sf_use_s2() is TRUE
#> Spherical geometry (s2) switched off
# West pred grid ----
# Read west coast data
litterw <- readr::read_csv("https://raw.githubusercontent.com/maxlindmark/marine-litter/main/data/west_coast_litter.csv")
#> Rows: 371 Columns: 23
#> ── Column specification ────────────────────────────────────────────────────────
#> Delimiter: ","
#> chr (4): haul_id, ansse_area, trend_area, var
#> dbl (18): plast_a, sup_a, fishery_a, tot_a, plast_b, quarter, st_no, haul_no...
#> lgl (1): balse_area
#>
#> ℹ Use `spec()` to retrieve the full column specification for this data.
#> ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
# Build a convex hull around the haul positions (X/Y presumably UTM km —
# see the *1000 conversions below) and close the polygon by repeating
# the first vertex.
x <- litterw$X
y <- litterw$Y
z <- chull(x, y) # indices of hull vertices, counter-clockwise
coords <- cbind(x[z], y[z])
coords <- rbind(coords, coords[1, ])
# Sanity-check plot of the hull. Fixed axis order: X (column 1) belongs on
# the horizontal axis; the original `coords[, 1] ~ coords[, 2]` drew the
# map transposed.
plot(coords[, 2] ~ coords[, 1])

# Wrap the closed hull in sp classes so sp::over() can test point-in-polygon
sp_poly <- sp::SpatialPolygons(
  list(sp::Polygons(list(sp::Polygon(coords)), ID = 1))
)
sp_poly_df <- sp::SpatialPolygonsDataFrame(sp_poly,
  data = data.frame(ID = 1)
)
class(sp_poly_df)
#> [1] "SpatialPolygonsDataFrame"
#> attr(,"package")
#> [1] "sp"
class(sp_poly)
#> [1] "SpatialPolygons"
#> attr(,"package")
#> [1] "sp"
plot(sp_poly)
plot(sp_poly_df)

# Build a regular 2 x 2 km grid over the data's bounding box, replicated
# for every survey year
cell_width <- 2 # 2*2 km grid cell
pred_grid <- expand.grid(
X = seq(min(litterw$X), max(litterw$X), cell_width),
Y = seq(min(litterw$Y), max(litterw$Y), cell_width),
year = unique(litterw$year)
)
# Quick look at one year of the full rectangular grid
ggplot(pred_grid %>% filter(year == 2019), aes(X, Y)) +
geom_point(size = 0.1) +
theme_void() +
coord_sf()
#> filter: removed 48,240 rows (83%), 9,648 rows remaining

# Keep only grid cells inside the convex hull: over() returns NA for
# points outside the polygon, so `inside` is TRUE within the hull
sp::coordinates(pred_grid) <- c("X", "Y") # promote to SpatialPointsDataFrame
inside <- !is.na(sp::over(pred_grid, as(sp_poly_df, "SpatialPolygons")))
pred_grid <- pred_grid[inside, ]
pred_grid <- as.data.frame(pred_grid) # back to a plain data frame
# Overlay the trimmed grid on the coast map (plot_map_west comes from the
# sourced map-plot.R; its coordinates are in metres, hence *1000)
plot_map_west +
geom_point(data = pred_grid, aes(X*1000, Y*1000), size = 0.001, alpha = 0.5) +
facet_wrap(~year, ncol = 3) +
geom_sf(size = 0.1) +
NULL

# Add lat and lon
# Convert UTM km to metres, then reproject UTM zone 33 -> WGS84 lon/lat
xy <- as.matrix(pred_grid %>% dplyr::select(X, Y) %>% mutate(X = X*1000, Y = Y*1000))
#> mutate: changed 25,362 values (100%) of 'X' (0 new NA)
#> changed 25,362 values (100%) of 'Y' (0 new NA)
v <- vect(xy, crs="+proj=utm +zone=33 +datum=WGS84 +units=m")
# Dedicated name for the projected points; the original reused `y`,
# clobbering the Y-coordinate vector created for the convex hull above
v_ll <- project(v, "+proj=longlat +datum=WGS84")
lonlat <- geom(v_ll)[, c("x", "y")] # x = longitude, y = latitude
pred_grid$lon <- lonlat[, 1]
pred_grid$lat <- lonlat[, 2]
ggplot(filter(pred_grid, year == 2017), aes(lon, lat)) + geom_point()
#> filter: removed 21,135 rows (83%), 4,227 rows remaining

# Add ocean area
# https://stackoverflow.com/questions/34272309/extract-shapefile-value-to-point-with-r
# https://gis.ices.dk/sf/
# ICES ANSSE (Greater North Sea) assessment-area polygons, local copy
shape <- shapefile("data/assessment_areas_marine_waters/ANSSE_assessmentareas_20181116.shp")
#> Warning in OGRSpatialRef(dsn, layer, morphFromESRI = morphFromESRI, dumpSRS
#> = dumpSRS, : Discarded datum European_Terrestrial_Reference_System_1989 in
#> CRS definition: +proj=laea +lat_0=52 +lon_0=10 +x_0=4321000 +y_0=3210000
#> +ellps=GRS80 +units=m +no_defs
plot(shape)

# Reproject the polygons to WGS84 lon/lat to match pred_grid$lon/lat
shape2 <- spTransform(shape, crs("+proj=longlat +datum=WGS84 +no_defs"))
plot(shape2)

# Point-in-polygon join: each grid point gets the MarUnitID of the
# assessment area it falls in (NA if outside all polygons)
pts <- SpatialPoints(cbind(pred_grid$lon, pred_grid$lat),
proj4string = CRS("+proj=longlat +datum=WGS84 +no_defs"))
pred_grid$area <- over(pts, shape2)$MarUnitID
ggplot(filter(pred_grid, year == 2017), aes(lon, lat, color = area)) + geom_point()
#> filter: removed 21,135 rows (83%), 4,227 rows remaining

# Add depth
# Bathymetry raster (NetCDF); already in WGS84 lon/lat per the crs() check below
dep_raster <- terra::rast("data/Mean depth natural colour (with land).nc")
crs(dep_raster, proj = TRUE)
#> [1] "+proj=longlat +datum=WGS84 +no_defs"
plot(dep_raster)

# Sample the raster's `elevation` layer at each grid point
pred_grid$depth <- terra::extract(dep_raster, pred_grid %>% dplyr::select(lon, lat))$elevation
ggplot(pred_grid, aes(lon, lat, color = depth*-1)) +
geom_point()

# Flip sign so depth is positive below the sea surface (raster presumably
# stores elevation, negative at sea — confirm against the NetCDF metadata)
pred_grid$depth <- pred_grid$depth*-1
# Drop cells with no depth value (likely on land or outside raster coverage)
pred_grid <- pred_grid %>% drop_na(depth)
#> drop_na: removed 492 rows (2%), 24,870 rows remaining
plot_map_west +
theme_plot(base_size = 14) +
geom_point(data = pred_grid, aes(X*1000, Y*1000, color = depth, alpha = area)) +
geom_sf(size = 0.1)
#> Warning: Using alpha for a discrete variable is not advised.

# Save
write_csv(pred_grid, "data/pred_grid_west.csv")
# East pred grid ----
# Read east coast data
littere <- readr::read_csv("https://raw.githubusercontent.com/maxlindmark/marine-litter/main/data/east_coast_litter.csv")
#> Rows: 368 Columns: 23
#> ── Column specification ────────────────────────────────────────────────────────
#> Delimiter: ","
#> chr (4): haul_id, balse_area, trend_area, var
#> dbl (18): plast_a, sup_a, fishery_a, tot_a, plast_b, quarter, st_no, haul_no...
#> lgl (1): ansse_area
#>
#> ℹ Use `spec()` to retrieve the full column specification for this data.
#> ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
# Build a convex hull around the haul positions (X/Y presumably UTM km —
# see the *1000 conversions below) and close the polygon by repeating
# the first vertex.
x <- littere$X
y <- littere$Y
z <- chull(x, y) # indices of hull vertices, counter-clockwise
coords <- cbind(x[z], y[z])
coords <- rbind(coords, coords[1, ])
# Sanity-check plot of the hull. Fixed axis order: X (column 1) belongs on
# the horizontal axis; the original `coords[, 1] ~ coords[, 2]` drew the
# map transposed.
plot(coords[, 2] ~ coords[, 1])

# Wrap the closed hull in sp classes so sp::over() can test point-in-polygon
sp_poly <- sp::SpatialPolygons(
  list(sp::Polygons(list(sp::Polygon(coords)), ID = 1))
)
sp_poly_df <- sp::SpatialPolygonsDataFrame(sp_poly,
  data = data.frame(ID = 1)
)
class(sp_poly_df)
#> [1] "SpatialPolygonsDataFrame"
#> attr(,"package")
#> [1] "sp"
class(sp_poly)
#> [1] "SpatialPolygons"
#> attr(,"package")
#> [1] "sp"
plot(sp_poly)
plot(sp_poly_df)

# Build a regular 2 x 2 km grid over the data's bounding box, replicated
# for every survey year
cell_width <- 2 # 2*2 km grid cell
pred_grid <- expand.grid(
X = seq(min(littere$X), max(littere$X), cell_width),
Y = seq(min(littere$Y), max(littere$Y), cell_width),
year = unique(littere$year)
)
# ggplot(pred_grid %>% filter(year == 2019), aes(X, Y)) +
# geom_point(size = 0.1) +
# theme_void() +
# coord_sf()
# Keep only grid cells inside the convex hull: over() returns NA for
# points outside the polygon, so `inside` is TRUE within the hull
sp::coordinates(pred_grid) <- c("X", "Y")
inside <- !is.na(sp::over(pred_grid, as(sp_poly_df, "SpatialPolygons")))
pred_grid <- pred_grid[inside, ]
pred_grid <- as.data.frame(pred_grid)
# plot_map_east +
# geom_point(data = pred_grid, aes(X*1000, Y*1000), size = 0.001, alpha = 0.5) +
# facet_wrap(~year, ncol = 3) +
# geom_sf(size = 0.1) +
# NULL
# Add lat and lon
# Convert UTM km to metres, then reproject UTM zone 33 -> WGS84 lon/lat
xy <- as.matrix(pred_grid %>% dplyr::select(X, Y) %>% mutate(X = X*1000, Y = Y*1000))
#> mutate: changed 93,480 values (100%) of 'X' (0 new NA)
#> changed 93,480 values (100%) of 'Y' (0 new NA)
v <- vect(xy, crs="+proj=utm +zone=33 +datum=WGS84 +units=m")
# Dedicated name for the projected points; the original reused `y`,
# clobbering the Y-coordinate vector created for the convex hull above
v_ll <- project(v, "+proj=longlat +datum=WGS84")
lonlat <- geom(v_ll)[, c("x", "y")] # x = longitude, y = latitude
pred_grid$lon <- lonlat[, 1]
pred_grid$lat <- lonlat[, 2]
ggplot(filter(pred_grid, year == 2017), aes(lon, lat)) + geom_point()
#> filter: removed 77,900 rows (83%), 15,580 rows remaining

# Add ocean area
# https://stackoverflow.com/questions/34272309/extract-shapefile-value-to-point-with-r
# https://gis.ices.dk/sf/
# ICES BALSE (Baltic Sea) assessment-area polygons, local copy
shape <- shapefile("data/assessment_areas_marine_waters/BALSE_assessmentareas_20181116.shp")
#> Warning in OGRSpatialRef(dsn, layer, morphFromESRI = morphFromESRI, dumpSRS
#> = dumpSRS, : Discarded datum European_Terrestrial_Reference_System_1989 in
#> CRS definition: +proj=laea +lat_0=52 +lon_0=10 +x_0=4321000 +y_0=3210000
#> +ellps=GRS80 +units=m +no_defs
plot(shape)

# Reproject the polygons to WGS84 lon/lat to match pred_grid$lon/lat
shape2 <- spTransform(shape, crs("+proj=longlat +datum=WGS84 +no_defs"))
plot(shape2)

# Point-in-polygon join: each grid point gets the MarUnitID of the
# assessment area it falls in (NA if outside all polygons)
pts <- SpatialPoints(cbind(pred_grid$lon, pred_grid$lat),
proj4string = CRS("+proj=longlat +datum=WGS84 +no_defs"))
pred_grid$area <- over(pts, shape2)$MarUnitID
ggplot(filter(pred_grid, year == 2017), aes(lon, lat, color = area)) + geom_point()
#> filter: removed 77,900 rows (83%), 15,580 rows remaining

# Add depth
# Bathymetry raster (NetCDF); already in WGS84 lon/lat per the crs() check below
dep_raster <- terra::rast("data/Mean depth natural colour (with land).nc")
crs(dep_raster, proj = TRUE)
#> [1] "+proj=longlat +datum=WGS84 +no_defs"
plot(dep_raster)

# Sample the raster's `elevation` layer at each grid point
pred_grid$depth <- terra::extract(dep_raster, pred_grid %>% dplyr::select(lon, lat))$elevation
ggplot(pred_grid, aes(lon, lat, color = depth*-1)) +
geom_point()

# Flip sign so depth is positive below the sea surface (raster presumably
# stores elevation, negative at sea — confirm against the NetCDF metadata)
pred_grid$depth <- pred_grid$depth*-1
# Drop cells with no depth value (likely on land or outside raster coverage)
pred_grid <- pred_grid %>% drop_na(depth)
#> drop_na: removed 12,600 rows (13%), 80,880 rows remaining
plot_map_east +
theme_plot(base_size = 14) +
geom_point(data = pred_grid, aes(X*1000, Y*1000, color = depth, alpha = area)) +
geom_sf(size = 0.1)
#> Warning: Using alpha for a discrete variable is not advised.

# Save
write_csv(pred_grid, "data/pred_grid_east.csv")